#define TCR_TxSZ_MAX ULL(39)
#define TCR_TxSZ_MAX_TTST ULL(48)
+#define TCR_T0SZ_SHIFT U(0)
+#define TCR_T1SZ_SHIFT U(16)
+
/* (internal) physical address size bits in EL3/EL1 */
#define TCR_PS_BITS_4GB ULL(0x0)
#define TCR_PS_BITS_64GB ULL(0x1)
#define TCR_SH_OUTER_SHAREABLE (ULL(0x2) << 12)
#define TCR_SH_INNER_SHAREABLE (ULL(0x3) << 12)
+#define TCR_RGN1_INNER_NC (ULL(0x0) << 24)
+#define TCR_RGN1_INNER_WBA (ULL(0x1) << 24)
+#define TCR_RGN1_INNER_WT (ULL(0x2) << 24)
+#define TCR_RGN1_INNER_WBNA (ULL(0x3) << 24)
+
+#define TCR_RGN1_OUTER_NC (ULL(0x0) << 26)
+#define TCR_RGN1_OUTER_WBA (ULL(0x1) << 26)
+#define TCR_RGN1_OUTER_WT (ULL(0x2) << 26)
+#define TCR_RGN1_OUTER_WBNA (ULL(0x3) << 26)
+
+#define TCR_SH1_NON_SHAREABLE (ULL(0x0) << 28)
+#define TCR_SH1_OUTER_SHAREABLE (ULL(0x2) << 28)
+#define TCR_SH1_INNER_SHAREABLE (ULL(0x3) << 28)
+
#define TCR_TG0_SHIFT U(14)
#define TCR_TG0_MASK ULL(3)
#define TCR_TG0_4K (ULL(0) << TCR_TG0_SHIFT)
#define TCR_TG0_64K (ULL(1) << TCR_TG0_SHIFT)
#define TCR_TG0_16K (ULL(2) << TCR_TG0_SHIFT)
+#define TCR_TG1_SHIFT U(30)
+#define TCR_TG1_MASK ULL(3)
+#define TCR_TG1_16K (ULL(1) << TCR_TG1_SHIFT)
+#define TCR_TG1_4K (ULL(2) << TCR_TG1_SHIFT)
+#define TCR_TG1_64K (ULL(3) << TCR_TG1_SHIFT)
+
#define TCR_EPD0_BIT (ULL(1) << 7)
#define TCR_EPD1_BIT (ULL(1) << 23)
/*
- * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
/* Inner & outer non-cacheable non-shareable. */\
tcr = TCR_SH_NON_SHAREABLE | \
TCR_RGN_OUTER_NC | TCR_RGN_INNER_NC | \
- (uint64_t) t0sz; \
+ ((uint64_t)t0sz << TCR_T0SZ_SHIFT); \
} else { \
/* Inner & outer WBWA & shareable. */ \
tcr = TCR_SH_INNER_SHAREABLE | \
TCR_RGN_OUTER_WBA | TCR_RGN_INNER_WBA | \
- (uint64_t) t0sz; \
+ ((uint64_t)t0sz << TCR_T0SZ_SHIFT); \
} \
tcr |= _tcr_extra; \
write_tcr_el##_el(tcr); \
*/
int t0sz = 64 - __builtin_ctzll(virtual_addr_space_size);
- tcr = (uint64_t) t0sz;
+ tcr = (uint64_t)t0sz << TCR_T0SZ_SHIFT;
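+ /*
+ * TCR_T0SZ_SHIFT is 0, so this writes t0sz directly into the T0SZ field;
+ * e.g. a 4GB (2^32) virtual address space gives t0sz = 64 - 32 = 32.
+ */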
/*
* Set the cacheability and shareability attributes for memory
panic();
}
+ /* Set up the shim layer translation tables. */
+ spm_exceptions_xlat_init_context();
+
/*
* Setup all Secure Partitions.
*/
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
void spm_sp_request_decrease(sp_context_t *sp_ctx);
int spm_sp_request_increase_if_zero(sp_context_t *sp_ctx);
+/* Functions related to the shim layer translation tables */
+void spm_exceptions_xlat_init_context(void);
+uint64_t *spm_exceptions_xlat_get_base_table(void);
+
/* Functions related to the translation tables management */
xlat_ctx_t *spm_sp_xlat_context_alloc(void);
void sp_map_memory_regions(sp_context_t *sp_ctx);
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
write_ctx_reg(get_sysregs_ctx(ctx), CTX_MAIR_EL1,
mmu_cfg_params[MMU_CFG_MAIR]);
+ /* Enable translations using TTBR1_EL1 */
+ int t1sz = 64 - __builtin_ctzll(SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE);
+ mmu_cfg_params[MMU_CFG_TCR] &= ~TCR_EPD1_BIT;
+ mmu_cfg_params[MMU_CFG_TCR] |=
+ ((uint64_t)t1sz << TCR_T1SZ_SHIFT) |
+ TCR_SH1_INNER_SHAREABLE |
+ TCR_RGN1_OUTER_WBA | TCR_RGN1_INNER_WBA |
+ TCR_TG1_4K;
+
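+ /*
+ * With SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE = 2^25 the resulting TCR_EL1
+ * value has T1SZ = 39, uses a 4KB granule for TTBR1_EL1 and selects
+ * inner-shareable, write-back write-allocate cacheable attributes for
+ * the shim layer's translation table walks.
+ */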
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TCR_EL1,
mmu_cfg_params[MMU_CFG_TCR]);
write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR0_EL1,
mmu_cfg_params[MMU_CFG_TTBR0]);
+ write_ctx_reg(get_sysregs_ctx(ctx), CTX_TTBR1_EL1,
+ (uint64_t)spm_exceptions_xlat_get_base_table());
+
/* Setup SCTLR_EL1 */
u_register_t sctlr_el1 = read_ctx_reg(get_sysregs_ctx(ctx), CTX_SCTLR_EL1);
* ----------------------------
*/
- /* Shim Exception Vector Base Address */
+ /*
+ * Shim exception vector base address. It is mapped at the start of the
+ * address space accessed by TTBR1_EL1, which means that the base
+ * address of the exception vectors depends on the size of the address
+ * space specified in TCR_EL1.T1SZ.
+ */
write_ctx_reg(get_sysregs_ctx(ctx), CTX_VBAR_EL1,
- SPM_SHIM_EXCEPTIONS_PTR);
+ UINT64_MAX - (SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE - 1ULL));
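+ /*
+ * For the 2^25-byte shim address space this evaluates to
+ * 0xFFFFFFFFFE000000, the lowest address translated through TTBR1_EL1.
+ */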
/*
* FPEN: Allow the Secure Partition to access FP/SIMD registers.
/*
- * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#define SPM_SHIM_EXCEPTIONS_SIZE \
(SPM_SHIM_EXCEPTIONS_END - SPM_SHIM_EXCEPTIONS_START)
+/*
+ * Use the smallest virtual address space size allowed in ARMv8.0 for
+ * compatibility: T1SZ = 39, i.e. a 2^25-byte (32 MB) region.
+ */
+#define SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE (1ULL << 25)
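+/*
+ * Only the S-EL1 exception vector region is mapped in this context, so a
+ * single mmap region and a single translation table (in addition to the
+ * base table allocated by REGISTER_XLAT_CONTEXT2) are enough.
+ */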
+#define SPM_SHIM_MMAP_REGIONS 1
+#define SPM_SHIM_XLAT_TABLES 1
+
#endif /* SPM_SHIM_PRIVATE_H */
return ctx;
};
+/*******************************************************************************
+ * Translation table context used for S-EL1 exception vectors
+ ******************************************************************************/
+
+REGISTER_XLAT_CONTEXT2(spm_sel1, SPM_SHIM_MMAP_REGIONS, SPM_SHIM_XLAT_TABLES,
+ SPM_SHIM_XLAT_VIRT_ADDR_SPACE_SIZE, PLAT_PHY_ADDR_SPACE_SIZE,
+ EL1_EL0_REGIME, PLAT_SP_IMAGE_XLAT_SECTION_NAME);
+
+void spm_exceptions_xlat_init_context(void)
+{
+ /* This region contains the exception vectors used at S-EL1. */
+ const mmap_region_t sel1_exception_vectors =
+ MAP_REGION(SPM_SHIM_EXCEPTIONS_PTR,
+ 0x0UL,
+ SPM_SHIM_EXCEPTIONS_SIZE,
+ MT_CODE | MT_SECURE | MT_PRIVILEGED);
+
+ mmap_add_region_ctx(&spm_sel1_xlat_ctx,
+ &sel1_exception_vectors);
+
+ init_xlat_tables_ctx(&spm_sel1_xlat_ctx);
+}
+
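+/*
+ * Return the base translation table of the shim layer context. It is
+ * programmed into each Secure Partition's TTBR1_EL1 during context setup,
+ * so all partitions share the same shim layer translation tables.
+ */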
+uint64_t *spm_exceptions_xlat_get_base_table(void)
+{
+ return spm_sel1_xlat_ctx.base_table;
+}
+
/*******************************************************************************
* Functions to allocate memory for regions.
******************************************************************************/
void sp_map_memory_regions(sp_context_t *sp_ctx)
{
- /* This region contains the exception vectors used at S-EL1. */
- const mmap_region_t sel1_exception_vectors =
- MAP_REGION_FLAT(SPM_SHIM_EXCEPTIONS_START,
- SPM_SHIM_EXCEPTIONS_SIZE,
- MT_CODE | MT_SECURE | MT_PRIVILEGED);
-
- mmap_add_region_ctx(sp_ctx->xlat_ctx_handle,
- &sel1_exception_vectors);
-
struct sp_rd_sect_mem_region *rdmem;
for (rdmem = sp_ctx->rd.mem_region; rdmem != NULL; rdmem = rdmem->next) {